library(rms)
## Warning: 패키지 'rms'는 R 버전 4.3.3에서 작성되었습니다
## 필요한 패키지를 로딩중입니다: Hmisc
## Warning: 패키지 'Hmisc'는 R 버전 4.3.3에서 작성되었습니다
## 
## 다음의 패키지를 부착합니다: 'Hmisc'
## The following objects are masked from 'package:base':
## 
##     format.pval, units
## Warning in .recacheSubclasses(def@className, def, env): 클래스 "replValueSp"의
## 서브 클래스 "ndiMatrix"가 정의되지 않았습니다; 업데이트된 정의가 아닙니다
library(e1071)
## Warning: 패키지 'e1071'는 R 버전 4.3.3에서 작성되었습니다
## 
## 다음의 패키지를 부착합니다: 'e1071'
## The following object is masked from 'package:Hmisc':
## 
##     impute
require(rjags)
## 필요한 패키지를 로딩중입니다: rjags
## Warning: 패키지 'rjags'는 R 버전 4.3.3에서 작성되었습니다
## 필요한 패키지를 로딩중입니다: coda
## Warning: 패키지 'coda'는 R 버전 4.3.3에서 작성되었습니다
## Linked to JAGS 4.3.1
## Loaded modules: basemod,bugs
require(runjags)
## 필요한 패키지를 로딩중입니다: runjags
## Warning: 패키지 'runjags'는 R 버전 4.3.3에서 작성되었습니다
require(coda)

## Data Load & Preprocessing -------------------------------
# Read the 2022 district-ranking data (EUC-KR encoded CSV, Korean column names).
# NOTE(review): hard-coded absolute Windows path — runs only on the author's machine;
# consider a relative path or here::here().
data22 <- read.csv("D:/0.KAIST_DHCSS(Master)/석사_1기/Bayesian Statistics/Project/data2022_순위.csv", fileEncoding = "euc-kr")

# Inspect the original (Korean) column names before renaming.
colnames(data22)
##  [1] "시도명"                      "시군구명"                   
##  [3] "aver_per_rank"               "SIGNGU_CD"                  
##  [5] "대학교_수"                   "창조산업_수"                
##  [7] "음식점업_수"                 "공원_수"                    
##  [9] "인구십만명당_문화기반시설수" "인구_천명당_외국인_수"      
## [11] "수도권"                      "ratio_crea"
# Replace the Korean column names with English identifiers.
# (Note: "Si-gun-gu_NM" contains a hyphen, so it is a non-syntactic name
# and must be accessed with backticks or [[ ]].)
english_names <- c("Si_do_NM", "Si-gun-gu_NM", "aver_per_rank", "SIGNGU_CD",
                   "Study_uni", "Study_industries", "Rest_cafe", "Rest_park",
                   "Culture_building", "Culture_people", "Capital", "Creative_Class")
colnames(data22) <- english_names
# Confirm the renaming took effect.
colnames(data22)
##  [1] "Si_do_NM"         "Si-gun-gu_NM"     "aver_per_rank"    "SIGNGU_CD"       
##  [5] "Study_uni"        "Study_industries" "Rest_cafe"        "Rest_park"       
##  [9] "Culture_building" "Culture_people"   "Capital"          "Creative_Class"
describe(data22) # No missing values.
## data22 
## 
##  12  Variables      74  Observations
## --------------------------------------------------------------------------------
## Si_do_NM 
##        n  missing distinct 
##       74        0        7 
##                                                                             
## Value      광주광역시 대구광역시 대전광역시 부산광역시 서울특별시 울산광역시
## Frequency      5          8          5         16         25          5     
## Proportion 0.068      0.108      0.068      0.216      0.338      0.068     
##                      
## Value      인천광역시
## Frequency     10     
## Proportion 0.135     
## --------------------------------------------------------------------------------
## Si-gun-gu_NM 
##        n  missing distinct 
##       74        0       53 
## 
## lowest : 강남구   강동구   강북구   강서구   강화군  , highest: 은평구   종로구   중구     중랑구   해운대구
## --------------------------------------------------------------------------------
## aver_per_rank 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1   0.4928   0.2772  0.07465  0.17962 
##      .25      .50      .75      .90      .95 
##  0.33534  0.45585  0.66066  0.82915  0.91150 
## 
## lowest : 0         0.0140845 0.028169  0.056338  0.084507 
## highest: 0.91071   0.912968  0.943662  0.957746  1        
## --------------------------------------------------------------------------------
## SIGNGU_CD 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1    22446     8401    11210    11294 
##      .25      .50      .75      .90      .95 
##    11568    26455    28243    30161    31121 
## 
## lowest : 11110 11140 11170 11200 11215, highest: 31110 31140 31170 31200 31710
## --------------------------------------------------------------------------------
## Study_uni 
##        n  missing distinct     Info     Mean      Gmd 
##       74        0        7    0.943    1.662     1.87 
##                                                     
## Value          0     1     2     3     4     5     6
## Frequency     25    17    11     9     7     1     4
## Proportion 0.338 0.230 0.149 0.122 0.095 0.014 0.054
## 
## For the frequency table, variable is rounded to the nearest 0
## --------------------------------------------------------------------------------
## Study_industries 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       73        1     3799     3220    784.5   1237.4 
##      .25      .50      .75      .90      .95 
##   1717.0   2920.5   4243.8   6930.3   9575.0 
## 
## lowest :    92   439   549   778   788, highest:  9357  9980 10170 16271 27671
## --------------------------------------------------------------------------------
## Rest_cafe 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1     2010     1207    697.0    839.7 
##      .25      .50      .75      .90      .95 
##   1212.8   1802.0   2470.2   3326.3   3788.6 
## 
## lowest :  142  253  553  643  726, highest: 3698 3957 4304 4435 7756
## --------------------------------------------------------------------------------
## Rest_park 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       64        1    86.96    61.83    10.00    19.90 
##      .25      .50      .75      .90      .95 
##    43.75    80.50   123.25   160.00   190.35 
## 
## lowest :   0   2   7  10  15, highest: 190 191 192 219 233
## --------------------------------------------------------------------------------
## Culture_building 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       48    0.999     6.05    4.849    2.200    2.600 
##      .25      .50      .75      .90      .95 
##    3.000    4.100    5.675   11.690   17.950 
## 
## lowest : 2    2.1  2.2  2.3  2.4 , highest: 17.6 18.6 18.9 25.7 48.1
## --------------------------------------------------------------------------------
## Culture_people 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1    21.48    17.75    6.024    6.609 
##      .25      .50      .75      .90      .95 
##    9.475   14.675   26.927   47.903   58.863 
## 
## lowest : 3.35  3.55  4.03  5.29  6.42 , highest: 58.44 59.65 66.93 76.62 85.92
## --------------------------------------------------------------------------------
## Capital 
##        n  missing distinct     Info      Sum     Mean      Gmd 
##       74        0        2    0.748       35    0.473   0.5054 
## 
## --------------------------------------------------------------------------------
## Creative_Class 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1   0.2032   0.1769  0.07055  0.07920 
##      .25      .50      .75      .90      .95 
##  0.09561  0.12506  0.19413  0.38002  0.61960 
## 
## lowest : 0.0387619 0.0672383 0.0686839 0.0697678 0.0709679
## highest: 0.610516  0.636476  0.799383  0.846759  1.56113  
## --------------------------------------------------------------------------------
# Columns to scale / transform.

# Histogram of each predictor; the x-axis label shows its sample skewness
# so heavily skewed variables are easy to spot.
skew_cols <- c("Study_uni", "Study_industries", "Rest_cafe",
               "Rest_park", "Culture_building", "Culture_people", "Creative_Class")
for (col in skew_cols) {
  hist(data22[[col]], main = col, xlab = skewness(data22[[col]]))
}

# Skewness is severe for several predictors.
# Standardization or centering: Use 'scale()' function.
# Yeo-Johnson transformation
# install.packages('bestNormalize')
library(bestNormalize) # Study_uni & Culture_building 
## Warning: 패키지 'bestNormalize'는 R 버전 4.3.3에서 작성되었습니다
# Apply the Yeo-Johnson transformation to every predictor in one loop.
# Replaces seven copy-pasted assignments; each column is overwritten with
# the transformed values ($x.t), exactly as before.
for (col in c("Study_uni", "Study_industries", "Rest_cafe",
              "Rest_park", "Culture_building", "Culture_people", "Creative_Class")) {
  data22[[col]] <- yeojohnson(data22[[col]])$x.t
}

# Re-check the distributions after the Yeo-Johnson transformation;
# skewness (x-axis label) should now be close to 0.
transformed_cols <- c("Study_uni", "Study_industries", "Rest_cafe",
                      "Rest_park", "Culture_building", "Culture_people", "Creative_Class")
for (nm in transformed_cols) {
  hist(data22[[nm]], main = nm, xlab = skewness(data22[[nm]]))
}

# Predictor matrix of the seven transformed covariates.
predictor_cols <- c("Study_uni", "Study_industries", "Rest_cafe",
                    "Rest_park", "Culture_building", "Culture_people", "Creative_Class")
X <- data22[, predictor_cols]

## Modeling ----------------------
myData <- data22
# Response: average percentile rank; x1..x7: the transformed predictors.
y  <- myData$aver_per_rank
x1 <- myData$Study_uni
x2 <- myData$Study_industries
x3 <- myData$Rest_cafe
x4 <- myData$Rest_park
x5 <- myData$Culture_building
x6 <- myData$Culture_people
x7 <- myData$Creative_Class
nn <- length(y)

# Data list handed to JAGS (Ntotal = number of observations).
dataList <- list(y = y, x1 = x1, x2 = x2, x3 = x3, x4 = x4,
                 x5 = x5, x6 = x6, x7 = x7, Ntotal = nn)

# M1_2018 ----------------------------------------------------------------
# FIX (review): the original line was the bare symbol `M1_2018`, which is
# never defined in this file and would raise "object 'M1_2018' not found"
# when sourced. It appears to be a section heading, so it is commented out.

# Define the model with no individual differences
# M1: robust regression of y on x1..x7.
#  - Likelihood: Student-t with mean b0 + sum(b_j * x_j), precision 1/sigma^2,
#    and degrees of freedom nu (heavy tails downweight outliers).
#  - b1..b7 get double-exponential (Laplace) priors, a LASSO-style shrinkage prior.
#  - nu ~ dexp(0.0333): prior mean ~30, allowing both near-normal and heavy tails.
#  - NOTE(review): the dgamma prior is placed on sigma (the scale) itself, not on
#    the precision as is more common in JAGS — confirm this is intended.
modelString_M1_2022 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2, nu)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    nu ~ dexp(0.0333)
    }
"
# MCMC run
# Random initial values for each of the 3 chains, generated by a small
# helper instead of three copy-pasted lists (same runif(1) draws per field).
make_inits_M1 <- function() {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       sigma = runif(1), nu = runif(1))
}
myinits_M12022 <- lapply(1:3, function(chain) make_inits_M1())

# 3 chains: 500 adaptation + 500 burn-in + 5000 retained samples,
# monitoring all coefficients plus sigma, nu, and pD/DIC for model comparison.
out_M12022 <- run.jags(model = modelString_M1_2022, data = dataList,
                       inits = myinits_M12022, n.chains = 3,
                       adapt = 500, burnin = 500, sample = 5000,
                       monitor = c("b0", "b1", "b2", "b3", "b4",
                                   "b5", "b6", "b7", "sigma", "nu", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## module dic loaded
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 10 variables....
## Finished running the simulation
print(out_M12022) # This run: pD = 8.95576, DIC = 14.09292 (see output below; an earlier run gave pD = 9.20454, DIC = -68.63088)
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                              
##         Lower95    Median  Upper95      Mean       SD Mode      MCerr MC%ofSD
## b0      0.43238    0.4916  0.54731   0.49183 0.029377   -- 0.00030191       1
## b1    -0.029838  0.031227 0.091352  0.031604 0.030919   -- 0.00038556     1.2
## b2     -0.28548  -0.11068 0.080129  -0.11042 0.093921   --  0.0039159     4.2
## b3    -0.063886  0.078091  0.22873  0.078535 0.075075   --  0.0025061     3.3
## b4      -0.1064  -0.02033 0.067001 -0.020398 0.044218   -- 0.00088156       2
## b5     -0.14309 -0.060893 0.019383 -0.060996  0.04171   -- 0.00094425     2.3
## b6    -0.088575  -0.02419 0.038955  -0.02468 0.032562   -- 0.00049051     1.5
## b7    -0.072327   0.04116  0.16277  0.042032 0.060121   --  0.0018915     3.1
## sigma   0.19946   0.24104  0.28914   0.24238 0.023012   -- 0.00028135     1.2
## nu       2.9898    33.758   104.58    42.643   32.733   --    0.56103     1.7
##                              
##       SSeff      AC.10   psrf
## b0     9468  0.0077825      1
## b1     6431   0.014402      1
## b2      575    0.45808 1.0075
## b3      897    0.31081 1.0034
## b4     2516   0.086432 1.0009
## b5     1951    0.12172 1.0048
## b6     4407   0.045154 1.0001
## b7     1010    0.25559 1.0055
## sigma  6690 -0.0010302 1.0009
## nu     3404   0.024166 1.0001
## 
## Model fit assessment:
## DIC = 14.09292
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 8.95576
## 
## Total time taken: 50.5 seconds
plot(out_M12022) # Trace/density/autocorrelation plots for convergence diagnostics
## Generating plots...

## PVAF --------------------------
# Extract the stored posterior draws of every M1 parameter.
# The loop replaces 16 copy-pasted assignment pairs and creates exactly the
# same objects as before: bNN_M1 (the mcmc.list) and bN_M1 (a plain numeric
# vector from unlisting draws[, 1] — the variable's column across chains).
for (d in 0:7) {
  draws <- as.mcmc.list(out_M12022, vars = paste0("b", d))
  assign(paste0("b", d, d, "_M1"), draws)                        # e.g. b00_M1
  assign(paste0("b", d, "_M1"), as.numeric(unlist(draws[, 1])))  # e.g. b0_M1
}

sigmas_M1 <- as.mcmc.list(out_M12022, vars = "sigma")
s_M1 <- as.numeric(unlist(sigmas_M1[, 1]))
nus_M1 <- as.mcmc.list(out_M12022, vars = "nu")
nu_M1 <- as.numeric(unlist(nus_M1[, 1]))

# Posterior Predictive Check (PPC)
# Evenly spaced grids over each predictor's observed range, used to draw
# posterior-predictive regression lines. One grid point per observation
# (length(y) generalizes the previously hard-coded 74).
nn <- length(y)  # Assuming x1 to x7 have the same length
for (d in 1:7) {
  xv <- get(paste0("x", d))
  assign(paste0("X", d), seq(min(xv), max(xv), length.out = nn))
}


# Plot observed data
# Generate PPC-simulated points

par(mfrow = c(4, 2))  # Set 4 rows and 2 columns for subplots

# For each predictor x_i: scatter the observed y, overlay one PPC-simulated
# value per observation, and draw 20 posterior-predictive regression lines.
for (i in 1:7) {
  x_var <- get(paste0("x", i))  # x1, x2, ..., x7

  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One simulated y per observation, thinning the chain by 10
  # (so only the first nn*10 stored draws are used).
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    idx <- j * 10
    y_ppc[j] <- b0_M1[idx] +
      b1_M1[idx] * x1[j] + b2_M1[idx] * x2[j] + b3_M1[idx] * x3[j] +
      b4_M1[idx] * x4[j] + b5_M1[idx] * x5[j] + b6_M1[idx] * x6[j] +
      b7_M1[idx] * x7[j] + s_M1[idx] * rt(1, nu_M1[idx])  # Student-t noise, scale s
  }

  # Add PPC points and a legend.
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend_labels <- c("Observed", "PPC-simulated")  # renamed: `text` masked base::text()
  legend_pch <- c(1, 18)
  legend("topright", legend = legend_labels,
         pch = legend_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines.
  # FIX (review): the inner loop originally reused `i` as its index,
  # shadowing the outer panel index within each iteration; renamed to `k`.
  mm <- 20
  pvaf_M1 <- numeric(mm)
  X_var <- get(paste0("X", i))  # X1, X2, ..., X7
  for (k in 1:mm) {
    idx <- k * 400  # thin to 20 roughly equally spaced posterior draws

    # Prediction over the plotting grid for this draw.
    ppd1 <- b0_M1[idx] + b1_M1[idx] * X1 + b2_M1[idx] * X2 + b3_M1[idx] * X3 +
      b4_M1[idx] * X4 + b5_M1[idx] * X5 + b6_M1[idx] * X6 + b7_M1[idx] * X7

    # Predicted y at the observed covariates for this draw.
    y.prd <- b0_M1[idx] + b1_M1[idx] * x1 + b2_M1[idx] * x2 + b3_M1[idx] * x3 +
      b4_M1[idx] * x4 + b5_M1[idx] * x5 + b6_M1[idx] * x6 + b7_M1[idx] * x7

    # PVAF = 1 - SSE/SST for this draw (can be negative if fit is poor).
    pvaf_M1[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (standalone recomputation).
# NOTE(review): the original also computed a grid prediction `ppd1` in this
# loop that was never used — dead code, removed; pvaf_M1 is unchanged.
mm <- 20
pvaf_M1 <- rep(0, mm)
for (i in 1:mm) {
  idx <- i * 400

  # Predicted y at the observed covariates for this posterior draw.
  y.prd <- b0_M1[idx] + b1_M1[idx] * x1 + b2_M1[idx] * x2 + b3_M1[idx] * x3 +
    b4_M1[idx] * x4 + b5_M1[idx] * x5 + b6_M1[idx] * x6 + b7_M1[idx] * x7

  pvaf_M1[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF (negative => posterior draws explain less variance than the grand mean)
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1), "\n") # this run: -0.0316 (an earlier run gave -10.78586)
## Mean Percent Variance Accounted For (PVAF): -0.03160387
# Normality check for residuals at the posterior means of the coefficients.
# NOTE(review): residu = fitted - observed (sign flipped vs the usual y - fitted);
# harmless for the QQ plot and histogram symmetry checks below.
residu <- mean(b0_M1) + mean(b1_M1)*x1 + mean(b2_M1)*x2 + mean(b3_M1)*x3 + 
  mean(b4_M1)*x4 + mean(b5_M1)*x5 + mean(b6_M1)*x6 + mean(b7_M1)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1n_2018 ---------------------------------------------------------------
# FIX (review): the original line was the bare symbol `M1n_2018`, undefined
# in this file — it would error when sourced. Treated as a section heading
# (normal-likelihood variant of M1) and commented out.

# Define the model with no individual differences
# M1s: same linear predictor as M1, but with a Normal likelihood instead of
# Student-t (i.e. no robustness parameter nu). Used to compare DIC against M1.
# NOTE(review): as in M1, the dgamma prior is on sigma (the scale) itself,
# not on the precision — confirm intended.
modelString_M1s_2022 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
      
    }
"
# MCMC run
# Random inits for the 3 chains, generated by a helper instead of three
# copy-pasted lists (same runif(1) draws per field; no nu in this model).
make_inits_M1s <- function() {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       sigma = runif(1))
}
myinits_M1s2022 <- lapply(1:3, function(chain) make_inits_M1s())

# Same sampling settings as M1 so the DIC values are comparable.
out_M1s2022 <- run.jags(model = modelString_M1s_2022, data = dataList,
                        inits = myinits_M1s2022, n.chains = 3,
                        adapt = 500, burnin = 500, sample = 5000,
                        monitor = c("b0", "b1", "b2", "b3", "b4",
                                    "b5", "b6", "b7", "sigma", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 9 variables....
## Finished running the simulation
print(out_M1s2022) # This run: pD = 8.98433, DIC = 13.23549 (see output below; an earlier run gave pD = 9.3521, DIC = -68.97304)
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                              
##         Lower95    Median  Upper95      Mean       SD Mode      MCerr MC%ofSD
## b0      0.43411   0.49281  0.54933   0.49293 0.029442   -- 0.00023777     0.8
## b1     -0.02946  0.031163  0.09387  0.031179 0.031401   -- 0.00039685     1.3
## b2     -0.29307 -0.097977 0.093711 -0.097359 0.099216   --  0.0043475     4.4
## b3    -0.086703  0.068888  0.21907  0.068878 0.077619   --  0.0028063     3.6
## b4     -0.10878 -0.022289 0.065753 -0.022341 0.044275   -- 0.00096399     2.2
## b5     -0.14569 -0.060732 0.021801 -0.061152 0.042532   -- 0.00099877     2.3
## b6    -0.091524 -0.024416 0.038283 -0.024824 0.032944   -- 0.00058771     1.8
## b7    -0.083851  0.038153  0.16087  0.039332 0.062689   --  0.0019673     3.1
## sigma   0.20948   0.24847  0.29437   0.25001 0.022053   -- 0.00026997     1.2
##                              
##       SSeff      AC.10   psrf
## b0    15332 -0.0025062 1.0002
## b1     6261   0.026848 1.0003
## b2      521    0.50658 1.0034
## b3      765    0.37359 1.0057
## b4     2109   0.091062 1.0006
## b5     1813    0.13257 1.0008
## b6     3142   0.061599 1.0003
## b7     1015    0.29083 1.0015
## sigma  6673   0.004012 1.0002
## 
## Model fit assessment:
## DIC = 13.23549
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 8.98433
## 
## Total time taken: 9.1 seconds
plot(out_M1s2022) # Trace/density/autocorrelation plots for convergence diagnostics
## Generating plots...

## PVAF --------------------------
# Extract the stored posterior draws of every M1s parameter.
# The loop replaces 16 copy-pasted assignment pairs and creates exactly the
# same objects as before: bNN_M1s (mcmc.list) and bN_M1s (numeric vector
# from unlisting draws[, 1] — the variable's column across chains).
for (d in 0:7) {
  draws <- as.mcmc.list(out_M1s2022, vars = paste0("b", d))
  assign(paste0("b", d, d, "_M1s"), draws)                        # e.g. b00_M1s
  assign(paste0("b", d, "_M1s"), as.numeric(unlist(draws[, 1])))  # e.g. b0_M1s
}

sigmas_M1s <- as.mcmc.list(out_M1s2022, vars = "sigma")
s_M1s <- as.numeric(unlist(sigmas_M1s[, 1]))


# Posterior Predictive Check (PPC)
# Rebuild the prediction grids (same construction as for M1); one grid point
# per observation — length(y) generalizes the previously hard-coded 74.
nn <- length(y)  # Assuming x1 to x7 have the same length
for (d in 1:7) {
  xv <- get(paste0("x", d))
  assign(paste0("X", d), seq(min(xv), max(xv), length.out = nn))
}


# Plot observed data
# Generate PPC-simulated points

par(mfrow = c(4, 2))  # Set 4 rows and 2 columns for subplots

# For each predictor x_i: scatter the observed y, overlay one PPC-simulated
# value per observation (Normal noise this time), and draw 20 posterior-
# predictive regression lines.
for (i in 1:7) {
  x_var <- get(paste0("x", i))  # x1, x2, ..., x7

  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One simulated y per observation, thinning the chain by 10.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    idx <- j * 10
    y_ppc[j] <- b0_M1s[idx] +
      b1_M1s[idx] * x1[j] + b2_M1s[idx] * x2[j] + b3_M1s[idx] * x3[j] +
      b4_M1s[idx] * x4[j] + b5_M1s[idx] * x5[j] + b6_M1s[idx] * x6[j] +
      b7_M1s[idx] * x7[j] + rnorm(1, 0, s_M1s[idx])  # Gaussian noise, sd s
  }

  # Add PPC points and a legend.
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend_labels <- c("Observed", "PPC-simulated")  # renamed: `text` masked base::text()
  legend_pch <- c(1, 18)
  legend("topright", legend = legend_labels,
         pch = legend_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines.
  # FIX (review): the inner loop originally reused `i` as its index,
  # shadowing the outer panel index within each iteration; renamed to `k`.
  mm <- 20
  pvaf_M1s <- numeric(mm)
  X_var <- get(paste0("X", i))  # X1, X2, ..., X7
  for (k in 1:mm) {
    idx <- k * 400  # thin to 20 roughly equally spaced posterior draws

    # Prediction over the plotting grid for this draw.
    ppd1 <- b0_M1s[idx] + b1_M1s[idx] * X1 + b2_M1s[idx] * X2 + b3_M1s[idx] * X3 +
      b4_M1s[idx] * X4 + b5_M1s[idx] * X5 + b6_M1s[idx] * X6 + b7_M1s[idx] * X7

    # Predicted y at the observed covariates for this draw.
    y.prd <- b0_M1s[idx] + b1_M1s[idx] * x1 + b2_M1s[idx] * x2 + b3_M1s[idx] * x3 +
      b4_M1s[idx] * x4 + b5_M1s[idx] * x5 + b6_M1s[idx] * x6 + b7_M1s[idx] * x7

    # PVAF = 1 - SSE/SST for this draw.
    pvaf_M1s[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (standalone recomputation).
# NOTE(review): the original computed an unused grid prediction `ppd1` here,
# including an rnorm() draw — dead code, removed. Skipping those rnorm()
# calls advances the RNG differently for later code, but no computed result
# depended on them; pvaf_M1s itself is unchanged.
mm <- 20
pvaf_M1s <- rep(0, mm)
for (i in 1:mm) {
  idx <- i * 400

  # Predicted y at the observed covariates for this posterior draw.
  y.prd <- b0_M1s[idx] + b1_M1s[idx] * x1 + b2_M1s[idx] * x2 + b3_M1s[idx] * x3 +
    b4_M1s[idx] * x4 + b5_M1s[idx] * x5 + b6_M1s[idx] * x6 + b7_M1s[idx] * x7

  pvaf_M1s[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF (negative => posterior draws explain less variance than the grand mean)
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1s), "\n") # this run: -0.0487 (an earlier run gave -0.4804828)
## Mean Percent Variance Accounted For (PVAF): -0.04871845
# Normality check for residuals at the posterior means of the coefficients.
# NOTE(review): residu = fitted - observed (sign flipped vs the usual y - fitted);
# harmless for the QQ plot and histogram symmetry checks below.
residu <- mean(b0_M1s) + mean(b1_M1s)*x1 + mean(b2_M1s)*x2 + mean(b3_M1s)*x3 + 
  mean(b4_M1s)*x4 + mean(b5_M1s)*x5 + mean(b6_M1s)*x6 + mean(b7_M1s)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1x_2018 ---------------------------------------------------------------
# FIX (review): the original line was the bare symbol `M1x_2018`, undefined
# in this file — it would error when sourced. Treated as a section heading
# (interaction variant of M1) and commented out.

# Define the model with no individual differences
# M1x: the robust (Student-t) regression of M1 extended with one interaction
# term, b23 * x2 * x3 (Study_industries x Rest_cafe), with a Normal prior on b23.
# NOTE(review): as in M1, the dgamma prior is on sigma (the scale) itself,
# not on the precision — confirm intended.
modelString_M1x_2022 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2, nu)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    b23 ~ dnorm(0, 1/2^2)
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    nu ~ dexp(0.0333)
    }
"
# MCMC run
# Random inits for the 3 chains, generated by a helper instead of three
# copy-pasted lists.
# FIX (review): the original inits omitted `nu` even though the t-likelihood
# model samples it (JAGS would auto-generate it); an explicit runif(1) init
# is added for consistency with the M1 inits.
make_inits_M1x <- function() {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       b23 = runif(1), sigma = runif(1), nu = runif(1))
}
myinits_M1x_2022 <- lapply(1:3, function(chain) make_inits_M1x())

# Same sampling settings as M1/M1s so DIC values are comparable;
# additionally monitors the interaction coefficient b23.
out_M1x2022 <- run.jags(model = modelString_M1x_2022, data = dataList,
                        inits = myinits_M1x_2022, n.chains = 3,
                        adapt = 500, burnin = 500, sample = 5000,
                        monitor = c("b0", "b1", "b2", "b3", "b4", "b5", "b6",
                                    "b7", "b23", "sigma", "nu", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 11 variables....
## Finished running the simulation
# Posterior summary for M1x. NOTE(review): the inline pD/DIC values were
# recorded from an earlier run; the rendered output below reports
# pD = 10.0753 and DIC = 16.23796 for this run.
print(out_M1x2022) #  pD = 9.20454,DIC = -68.63088
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                               
##         Lower95     Median  Upper95      Mean       SD Mode      MCerr MC%ofSD
## b0      0.43369    0.49797  0.56645   0.49787 0.033578   -- 0.00045416     1.4
## b1    -0.034442   0.029071 0.089873  0.029282 0.031548   -- 0.00039065     1.2
## b2     -0.29497   -0.10471 0.082528  -0.10532 0.098277   --  0.0042195     4.3
## b3    -0.075828   0.074758  0.22208   0.07577 0.077211   --  0.0026612     3.4
## b4     -0.10755  -0.021843 0.067585  -0.02201 0.044959   -- 0.00099234     2.2
## b5     -0.14333   -0.05824 0.027419 -0.058717 0.043284   --  0.0010051     2.3
## b6    -0.090441  -0.024566 0.040054 -0.024786 0.033076   -- 0.00054406     1.6
## b7    -0.077683   0.040018  0.16855  0.041271 0.062315   --  0.0019697     3.2
## b23    -0.04204 -0.0059478 0.031657 -0.005634 0.018619   -- 0.00028209     1.5
## sigma   0.20065    0.24266  0.29301   0.24417 0.023561   -- 0.00029811     1.3
## nu       2.7858     33.061   108.41    41.973   32.559   --    0.53396     1.6
##                               
##       SSeff      AC.10    psrf
## b0     5466  0.0089248  1.0009
## b1     6522  0.0075807  1.0001
## b2      542    0.48497  1.0007
## b3      842    0.34171  1.0004
## b4     2053   0.096279  1.0004
## b5     1855     0.1125  1.0007
## b6     3696   0.059209  1.0007
## b7     1001    0.28377  1.0005
## b23    4357   0.010801  1.0015
## sigma  6246    0.01587 0.99999
## nu     3718 -0.0028455       1
## 
## Model fit assessment:
## DIC = 16.23796
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 10.0753
## 
## Total time taken: 53 seconds
# Trace/density/autocorrelation diagnostics for all monitored M1x parameters.
plot(out_M1x2022) 
## Generating plots...

## PVAF --------------------------
# Pull the posterior draws of every M1x regression coefficient out of the
# runjags fit. For each name "bK" this creates bKK_M1x (the raw mcmc.list;
# "b23" doubles to "b233") and bK_M1x (all chains flattened to a plain
# numeric vector), matching the variable names used by the PPC/PVAF code
# further down.
for (nm in c("b0", "b1", "b2", "b3", "b4", "b5", "b6", "b7", "b23")) {
  draws <- as.mcmc.list(out_M1x2022, vars = nm)
  assign(paste0(nm, sub("^b", "", nm), "_M1x"), draws)
  assign(paste0(nm, "_M1x"), as.numeric(unlist(draws[, 1])))
}

# Scale (sigma) and degrees-of-freedom (nu) draws, extracted the same way.
sigmas_M1x <- as.mcmc.list(out_M1x2022, vars = "sigma")
s_M1x <- as.numeric(unlist(sigmas_M1x[, 1]))
nus_M1x <- as.mcmc.list(out_M1x2022, vars = "nu")
nu_M1x <- as.numeric(unlist(nus_M1x[, 1]))

# Posterior Predictive Check (PPC)
# Build a 74-point evenly spaced grid spanning the observed range of each
# predictor; Xk is used as the x-axis for the posterior predictive lines.
grid_74 <- function(v) seq(min(v), max(v), length.out = 74)
X1 <- grid_74(x1)
X2 <- grid_74(x2)
X3 <- grid_74(x3)
X4 <- grid_74(x4)
X5 <- grid_74(x5)
X6 <- grid_74(x6)
X7 <- grid_74(x7)


# Plot observed data
# Generate PPC-simulated points

nn <- length(y)  # Assuming x1 to x7 have the same length

par(mfrow = c(4, 2))  # Set 4 rows and 2 columns for subplots

# One panel per predictor: observed (x_i, y) points, PPC-simulated points
# from the M1x posterior, and 20 posterior predictive regression lines.
for (i in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", i))

  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One posterior-predictive replicate per observation, thinning the
  # posterior by keeping every 10th draw; the scaled rt() noise matches
  # the Student-t likelihood of M1x.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1x[j*10] +
      b1_M1x[j*10] * x1[j] +
      b2_M1x[j*10] * x2[j] +
      b3_M1x[j*10] * x3[j] +
      b4_M1x[j*10] * x4[j] +
      b5_M1x[j*10] * x5[j] +
      b6_M1x[j*10] * x6[j] +
      b7_M1x[j*10] * x7[j] + b23_M1x[j*10]*x2[j]*x3[j] + s_M1x[j*10]*rt(1, nu_M1x[j*10])
  }

  # Add PPC points
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  # FIX: these were assigned to `text` and `pchx` with `=`; `text`
  # masked graphics::text(). Renamed to harmless local names.
  leg_text <- c("Observed", "PPC-simulated")
  leg_pch <- c(1, 18)
  # Add legend
  legend("topright", legend = leg_text,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines (every 400th posterior draw).
  # FIX: the inner loop originally reused `i`, clobbering the panel index
  # of the outer loop; it now uses `k`.
  mm <- 20
  pvaf_M1 <- numeric(mm)  # per-draw PVAF (recomputed each panel; the
                          # standalone PVAF block below is authoritative)
  X_var <- get(paste0("X", i))
  for (k in 1:mm) {
    # Predictive line over the X grids.
    ppd1 <- b0_M1x[k*400] +
      b1_M1x[k*400] * X1 +
      b2_M1x[k*400] * X2 +
      b3_M1x[k*400] * X3 +
      b4_M1x[k*400] * X4 +
      b5_M1x[k*400] * X5 +
      b6_M1x[k*400] * X6 +
      b7_M1x[k*400] * X7 + b23_M1x[k*400]*X2*X3

    # Fitted values at the observed predictors (used for the PVAF).
    y.prd <- b0_M1x[k*400] +
      b1_M1x[k*400] * x1 +
      b2_M1x[k*400] * x2 +
      b3_M1x[k*400] * x3 +
      b4_M1x[k*400] * x4 +
      b5_M1x[k*400] * x5 +
      b6_M1x[k*400] * x6 +
      b7_M1x[k*400] * x7 + b23_M1x[k*400]*x2*x3

    # Calculate PVAF
    pvaf_M1[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# Posterior predictive regression lines and PVAF calculation
# PVAF = 1 - SSE/SST, computed for 20 thinned posterior draws (every 400th).
mm <- 20
pvaf_M1x <- rep(0, mm)
for ( i in 1:mm){
  # Predictive line over the X grids (computed but not used for the PVAF;
  # only y.prd below feeds the calculation).
  ppd1 <- b0_M1x[i*400] + 
    b1_M1x[i*400] * X1 + 
    b2_M1x[i*400] * X2 +
    b3_M1x[i*400] * X3 +
    b4_M1x[i*400] * X4 +
    b5_M1x[i*400] * X5 +
    b6_M1x[i*400] * X6 +
    b7_M1x[i*400] * X7 + b23_M1x[i*400]*X2*X3
  
  # Fitted values at the observed predictors.
  y.prd <- b0_M1x[i*400] +
    b1_M1x[i*400] * x1 +
    b2_M1x[i*400] * x2 +
    b3_M1x[i*400] * x3 +
    b4_M1x[i*400] * x4 +
    b5_M1x[i*400] * x5 +
    b6_M1x[i*400] * x6 +
    b7_M1x[i*400] * x7 + b23_M1x[i*400]*x2*x3
  
  pvaf_M1x[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
# NOTE(review): the inline value (-0.2739477) is from an earlier run; this
# render reports -0.08769031.
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1x), "\n") # -0.2739477 
## Mean Percent Variance Accounted For (PVAF): -0.08769031
# Normality check for residuals
# Residuals of the posterior-mean fit. Note this is (fitted - observed),
# i.e. the sign is flipped relative to the usual (observed - fitted).
residu <- mean(b0_M1x) + mean(b1_M1x)*x1 + mean(b2_M1x)*x2 + mean(b3_M1x)*x3 + 
  mean(b4_M1x)*x4 + mean(b5_M1x)*x5 + mean(b6_M1x)*x6 + mean(b7_M1x)*x7 + mean(b23_M1x)*x2*x3 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# NOTE(review): bare expression — this prints the object `M1xn_2018`
# (presumably the matching 2018 model fit defined earlier in the file) if
# it exists, and errors otherwise. It reads like a section marker for the
# "M1xn" model; confirm intent.
M1xn_2018

# Define the model with no individual differences
# M1xn: same linear predictor as M1x (x1..x7 plus the x2*x3 interaction)
# but with a Gaussian likelihood instead of the Student-t. b1..b7 get
# double-exponential (lasso-style) priors; dnorm in JAGS is parameterized
# by precision, hence 1/sigma^2. The dgamma prior is placed on sigma
# itself (the standard deviation), not on the precision.
modelString_M1xn_2022 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    b23 ~ dnorm(0, 1/2^2)
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
      
    }
"
# MCMC run
# Random starting values for each of the three chains: every regression
# coefficient, the interaction term b23 and sigma are drawn from U(0, 1).
myinits_M1xn_2022 <- lapply(1:3, function(chain) {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       b23 = runif(1), sigma = runif(1))
})

# Fit M1xn (Gaussian likelihood, lasso-style priors): 3 chains, 500 adapt +
# 500 burn-in, 5000 retained samples per chain; pD/DIC monitored so it can
# be compared against the robust M1x fit above.
out_M1xn2022 <- run.jags (model=modelString_M1xn_2022, data=dataList, inits=myinits_M1xn_2022,
                         n.chains=3,
                         adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                       "b5","b6", "b7", "b23", "sigma", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 10 variables....
## Finished running the simulation
# Posterior summary for M1xn. NOTE(review): the inline pD/DIC values were
# recorded from an earlier run; the rendered output below reports
# pD = 9.99158 and DIC = 15.11651 for this run.
print(out_M1xn2022) # pD = 9.3521, -68.97304
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                                
##         Lower95     Median  Upper95       Mean       SD Mode      MCerr MC%ofSD
## b0      0.43684    0.49997  0.56962    0.50013 0.033817   -- 0.00035511     1.1
## b1    -0.032629    0.02967 0.092048   0.029628 0.031892   -- 0.00043106     1.4
## b2     -0.30286   -0.10446 0.081382   -0.10431 0.099123   --  0.0044713     4.5
## b3    -0.085501   0.076832  0.22821   0.077368 0.079155   --  0.0027933     3.5
## b4     -0.11651  -0.023978 0.062548  -0.023857 0.044881   --  0.0009705     2.2
## b5     -0.14366  -0.057175 0.030113  -0.057857 0.044125   --  0.0010153     2.3
## b6    -0.092294  -0.026525 0.040006  -0.026675 0.033602   --  0.0005661     1.7
## b7    -0.080399   0.041547  0.16494   0.041849 0.062241   --  0.0020604     3.3
## b23   -0.043016 -0.0074403 0.026296 -0.0075921   0.0177   --  0.0002102     1.2
## sigma   0.20943    0.24983  0.29511    0.25131 0.022193   -- 0.00024949     1.1
##                               
##       SSeff      AC.10    psrf
## b0     9069  0.0035909       1
## b1     5474   0.031339  1.0002
## b2      491    0.50136  1.0017
## b3      803    0.34549   1.003
## b4     2139   0.093141  1.0001
## b5     1889    0.13902  1.0001
## b6     3523   0.061707  1.0002
## b7      912    0.30214  1.0003
## b23    7091 -0.0015121 0.99994
## sigma  7913 -0.0079336  1.0001
## 
## Model fit assessment:
## DIC = 15.11651
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 9.99158
## 
## Total time taken: 9 seconds
# Trace/density/autocorrelation diagnostics for all monitored M1xn parameters.
plot(out_M1xn2022) 
## Generating plots...

## PVAF --------------------------
# Extract posterior draws for model M1xn. For each coefficient "bK" this
# creates bKK_M1xn (the raw mcmc.list; "b23" doubles to "b233") and
# bK_M1xn (all chains flattened to a numeric vector).
# FIX: the original stored the b6 mcmc.list under the typo'd name
# `b66_M1xm`; it is now `b66_M1xn`, consistent with every other draw.
for (nm in c("b0", "b1", "b2", "b3", "b4", "b5", "b6", "b7", "b23")) {
  draws <- as.mcmc.list(out_M1xn2022, vars = nm)
  assign(paste0(nm, sub("^b", "", nm), "_M1xn"), draws)
  assign(paste0(nm, "_M1xn"), as.numeric(unlist(draws[, 1])))
}

# Residual standard deviation draws.
sigmas_M1xn <- as.mcmc.list(out_M1xn2022, vars = "sigma")
s_M1xn <- as.numeric(unlist(sigmas_M1xn[, 1]))


# Posterior Predictive Check (PPC)
# Rebuild the 74-point prediction grids (same as for M1x) spanning the
# observed range of each predictor.
grid_74 <- function(v) seq(min(v), max(v), length.out = 74)
X1 <- grid_74(x1)
X2 <- grid_74(x2)
X3 <- grid_74(x3)
X4 <- grid_74(x4)
X5 <- grid_74(x5)
X6 <- grid_74(x6)
X7 <- grid_74(x7)


# Plot observed data
# Generate PPC-simulated points

nn <- length(y)  # Assuming x1 to x7 have the same length

par(mfrow = c(4, 2))  # Set 4 rows and 2 columns for subplots

# One panel per predictor: observed (x_i, y) points, PPC-simulated points
# from the M1xn posterior, and 20 posterior predictive regression lines.
for (i in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", i))

  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One replicate per observation (every 10th posterior draw); Gaussian
  # rnorm() noise matches the dnorm likelihood of M1xn.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1xn[j*10] +
      b1_M1xn[j*10] * x1[j] +
      b2_M1xn[j*10] * x2[j] +
      b3_M1xn[j*10] * x3[j] +
      b4_M1xn[j*10] * x4[j] +
      b5_M1xn[j*10] * x5[j] +
      b6_M1xn[j*10] * x6[j] +
      b7_M1xn[j*10] * x7[j] + b23_M1xn[j*10]*x2[j]*x3[j] + rnorm(1, 0, s_M1xn[j*10])
  }

  # Add PPC points
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  # FIX: these were assigned to `text` and `pchx`; `text` masked
  # graphics::text(). Renamed to harmless local names.
  leg_text <- c("Observed", "PPC-simulated")
  leg_pch <- c(1, 18)
  # Add legend
  legend("topright", legend = leg_text,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines (every 400th posterior draw).
  # FIX: the inner loop originally reused `i`, clobbering the panel index
  # of the outer loop; it now uses `k`.
  mm <- 20
  pvaf_M1s <- numeric(mm)  # per-draw PVAF (recomputed each panel; the
                           # standalone PVAF block below is authoritative)
  X_var <- get(paste0("X", i))
  for (k in 1:mm) {
    # Predictive line over the X grids.
    ppd1 <- b0_M1xn[k*400] +
      b1_M1xn[k*400] * X1 +
      b2_M1xn[k*400] * X2 +
      b3_M1xn[k*400] * X3 +
      b4_M1xn[k*400] * X4 +
      b5_M1xn[k*400] * X5 +
      b6_M1xn[k*400] * X6 +
      b7_M1xn[k*400] * X7 + b23_M1xn[k*400]*X2*X3

    # Fitted values at the observed predictors (used for the PVAF).
    y.prd <- b0_M1xn[k*400] +
      b1_M1xn[k*400] * x1 +
      b2_M1xn[k*400] * x2 +
      b3_M1xn[k*400] * x3 +
      b4_M1xn[k*400] * x4 +
      b5_M1xn[k*400] * x5 +
      b6_M1xn[k*400] * x6 +
      b7_M1xn[k*400] * x7 + b23_M1xn[k*400]*x2*x3

    # Calculate PVAF
    pvaf_M1s[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# Posterior predictive regression lines and PVAF calculation for M1xn
# (PVAF = 1 - SSE/SST over 20 thinned posterior draws, every 400th).
mm <- 20
pvaf_M1s <- rep(0, mm)
for (i in 1:mm) {
  # Predictive line over the X grids (computed for completeness; only
  # y.prd below feeds the PVAF).
  # FIX: the interaction term originally read `b23_M1xn[j*10]*X3*X3`,
  # indexing with the stale loop variable `j` left over from the plotting
  # loop and squaring X3 instead of using the X2*X3 interaction. It now
  # uses the same draw index as the other coefficients and the model's
  # actual x2*x3 interaction term.
  ppd1 <- b0_M1xn[i*400] +
    b1_M1xn[i*400] * X1 +
    b2_M1xn[i*400] * X2 +
    b3_M1xn[i*400] * X3 +
    b4_M1xn[i*400] * X4 +
    b5_M1xn[i*400] * X5 +
    b6_M1xn[i*400] * X6 +
    b7_M1xn[i*400] * X7 + b23_M1xn[i*400]*X2*X3

  # Fitted values at the observed predictors.
  y.prd <- b0_M1xn[i*400] +
    b1_M1xn[i*400] * x1 +
    b2_M1xn[i*400] * x2 +
    b3_M1xn[i*400] * x3 +
    b4_M1xn[i*400] * x4 +
    b5_M1xn[i*400] * x5 +
    b6_M1xn[i*400] * x6 +
    b7_M1xn[i*400] * x7 + b23_M1xn[i*400]*x2*x3

  pvaf_M1s[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
# NOTE(review): the inline value (-10.16325) is from an earlier run; this
# render reports -0.05931823.
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1s), "\n") #  -10.16325 
## Mean Percent Variance Accounted For (PVAF): -0.05931823
# Normality check for residuals
# Residuals of the posterior-mean M1xn fit. Note this is
# (fitted - observed), i.e. the sign is flipped relative to the usual
# (observed - fitted) convention.
residu <- mean(b0_M1xn) + mean(b1_M1xn)*x1 + mean(b2_M1xn)*x2 + mean(b3_M1xn)*x3 + 
  mean(b4_M1xn)*x4 + mean(b5_M1xn)*x5 + mean(b6_M1xn)*x6 + mean(b7_M1xn)*x7 + mean(b23_M1xn)*x2*x3 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# NOTE(review): bare expression — prints the object `M1h_2018` (presumably
# the matching 2018 fit defined earlier in the file) if it exists, errors
# otherwise. Reads like a section marker for the "M1h" model; confirm.
M1h_2018

# M1h: Student-t likelihood without the interaction term, with a
# hierarchical shrinkage prior — each slope gets a Cauchy prior
# (dt with 1 degree of freedom) whose scale sigmaBeta is itself given a
# dgamma(2.618, 1.618) hyperprior. nu (t degrees of freedom) ~ dexp(1/30).
modelString_M1h_2022 = "
model{
  for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2, nu)
    }
      
  # priors vague 
  b0 ~ dnorm(0, 1/2^2)
  b1 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b2 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b3 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b4 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b5 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b6 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b7 ~ dt(0.0, 1/sigmaBeta^2, 1)
  
  # Sigma
  sigma ~ dgamma(0.01, 0.01)
  nu ~ dexp(0.0333)
  sigmaBeta ~ dgamma(2.618, 1.618) 
}
"

# MCMC run
# Random starting values for each of the three chains: all regression
# coefficients, sigma, nu and the prior scale sigmaBeta drawn from U(0, 1).
myinits_M1h2022 <- lapply(1:3, function(chain) {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       sigma = runif(1), nu = runif(1), sigmaBeta = runif(1))
})

# Fit M1h (robust-t likelihood, hierarchical Cauchy shrinkage priors):
# 3 chains, 500 adapt + 500 burn-in, 5000 retained samples per chain;
# pD/DIC monitored for model comparison.
out_M1h2022 <- run.jags(model=modelString_M1h_2022, data=dataList, inits=myinits_M1h2022,
                        n.chains=3,
                        adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                      "b5","b6", "b7", "sigma", "nu", "sigmaBeta","pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 11 variables....
## Finished running the simulation
# Posterior summary for M1h. NOTE(review): the inline pD/DIC values were
# copied from another model's run; the rendered output below reports
# pD = 5.69082 and DIC = 9.5723 for this run.
print(out_M1h2022) #  pD = 9.20454,DIC = -68.63088
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                            
##             Lower95     Median  Upper95       Mean       SD Mode      MCerr
## b0          0.43506      0.492  0.54892    0.49221 0.029106   --  0.0003037
## b1         -0.02925  0.0079355 0.060972   0.011541 0.022617   -- 0.00033031
## b2        -0.095722 -0.0050265 0.054226  -0.011494 0.036178   -- 0.00091022
## b3        -0.051104  0.0036557 0.081195  0.0084771 0.031487   -- 0.00068718
## b4        -0.068685 -0.0055151 0.036728 -0.0095374  0.02566   -- 0.00042132
## b5        -0.083262  -0.015354 0.027417  -0.021317  0.02898   -- 0.00056606
## b6        -0.058183 -0.0053369 0.032886 -0.0079801 0.022086   --  0.0002842
## b7        -0.056637 0.00019573 0.062495  0.0016687 0.028185   -- 0.00061471
## sigma       0.19948    0.23921   0.2847    0.24063 0.022103   -- 0.00025526
## nu           3.7289     34.739   107.46     43.247   32.308   --    0.53691
## sigmaBeta 0.0014362   0.024932  0.08607   0.033105 0.029923   -- 0.00097821
##                                          
##           MC%ofSD SSeff      AC.10   psrf
## b0              1  9185 -0.0037866 1.0002
## b1            1.5  4688   0.025943 1.0021
## b2            2.5  1580     0.1766  1.007
## b3            2.2  2100    0.10791 1.0067
## b4            1.6  3709   0.037778  1.001
## b5              2  2621   0.083472 1.0014
## b6            1.3  6040  0.0018856 1.0006
## b7            2.2  2102    0.11061 1.0016
## sigma         1.2  7497  -0.014894      1
## nu            1.7  3621    0.02814 1.0001
## sigmaBeta     3.3   936    0.29767 1.0018
## 
## Model fit assessment:
## DIC = 9.5723
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 5.69082
## 
## Total time taken: 47.9 seconds
# Trace/density/autocorrelation diagnostics for all monitored M1h parameters.
plot(out_M1h2022) 
## Generating plots...

## PVAF --------------------------
# Posterior draws for model M1h: for each coefficient "bK" this creates
# bKK_M1h (the raw mcmc.list) and bK_M1h (all chains flattened to a plain
# numeric vector), matching the names used by the PPC/PVAF code below.
for (nm in c("b0", "b1", "b2", "b3", "b4", "b5", "b6", "b7")) {
  draws <- as.mcmc.list(out_M1h2022, vars = nm)
  assign(paste0(nm, sub("^b", "", nm), "_M1h"), draws)
  assign(paste0(nm, "_M1h"), as.numeric(unlist(draws[, 1])))
}

# Scale (sigma) and degrees-of-freedom (nu) draws.
sigmas_M1h <- as.mcmc.list(out_M1h2022, vars = "sigma")
s_M1h <- as.numeric(unlist(sigmas_M1h[, 1]))
nus_M1h <- as.mcmc.list(out_M1h2022, vars = "nu")
nu_M1h <- as.numeric(unlist(nus_M1h[, 1]))

# Posterior Predictive Check (PPC)
# Rebuild the 74-point prediction grids spanning the observed range of
# each predictor.
grid_74 <- function(v) seq(min(v), max(v), length.out = 74)
X1 <- grid_74(x1)
X2 <- grid_74(x2)
X3 <- grid_74(x3)
X4 <- grid_74(x4)
X5 <- grid_74(x5)
X6 <- grid_74(x6)
X7 <- grid_74(x7)


# Plot observed data
# Generate PPC-simulated points

nn <- length(y)  # Assuming x1 to x7 have the same length

par(mfrow = c(4, 2))  # Set 4 rows and 2 columns for subplots

# One panel per predictor: observed (x_i, y) points, PPC-simulated points
# from the M1h posterior, and 20 posterior predictive regression lines.
for (i in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", i))

  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One replicate per observation (every 10th posterior draw); the scaled
  # rt() noise matches the Student-t likelihood of M1h. No interaction
  # term in this model.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1h[j*10] +
      b1_M1h[j*10] * x1[j] +
      b2_M1h[j*10] * x2[j] +
      b3_M1h[j*10] * x3[j] +
      b4_M1h[j*10] * x4[j] +
      b5_M1h[j*10] * x5[j] +
      b6_M1h[j*10] * x6[j] +
      b7_M1h[j*10] * x7[j] + s_M1h[j*10]*rt(1, nu_M1h[j*10])
  }

  # Add PPC points
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  # FIX: these were assigned to `text` and `pchx`; `text` masked
  # graphics::text(). Renamed to harmless local names.
  leg_text <- c("Observed", "PPC-simulated")
  leg_pch <- c(1, 18)
  # Add legend
  legend("topright", legend = leg_text,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines (every 400th posterior draw).
  # FIX: the inner loop originally reused `i`, clobbering the panel index
  # of the outer loop; it now uses `k`.
  mm <- 20
  pvaf_M1h <- numeric(mm)  # per-draw PVAF (recomputed each panel; the
                           # standalone PVAF block below is authoritative)
  X_var <- get(paste0("X", i))
  for (k in 1:mm) {
    # Predictive line over the X grids.
    ppd1 <- b0_M1h[k*400] +
      b1_M1h[k*400] * X1 +
      b2_M1h[k*400] * X2 +
      b3_M1h[k*400] * X3 +
      b4_M1h[k*400] * X4 +
      b5_M1h[k*400] * X5 +
      b6_M1h[k*400] * X6 +
      b7_M1h[k*400] * X7

    # Fitted values at the observed predictors (used for the PVAF).
    y.prd <- b0_M1h[k*400] +
      b1_M1h[k*400] * x1 +
      b2_M1h[k*400] * x2 +
      b3_M1h[k*400] * x3 +
      b4_M1h[k*400] * x4 +
      b5_M1h[k*400] * x5 +
      b6_M1h[k*400] * x6 +
      b7_M1h[k*400] * x7

    # Calculate PVAF
    pvaf_M1h[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# Posterior predictive regression lines and PVAF calculation for M1h
# (PVAF = 1 - SSE/SST over 20 thinned posterior draws, every 400th).
mm <- 20
pvaf_M1h <- rep(0, mm)
for (i in 1:mm) {
  # Predictive line over the X grids (computed for completeness; only
  # y.prd below feeds the PVAF).
  ppd1 <- b0_M1h[i*400] +
    b1_M1h[i*400] * X1 +
    b2_M1h[i*400] * X2 +
    b3_M1h[i*400] * X3 +
    b4_M1h[i*400] * X4 +
    b5_M1h[i*400] * X5 +
    b6_M1h[i*400] * X6 +
    b7_M1h[i*400] * X7

  # Fitted values at the observed predictors.
  y.prd <- b0_M1h[i*400] +
    b1_M1h[i*400] * x1 +
    b2_M1h[i*400] * x2 +
    b3_M1h[i*400] * x3 +
    b4_M1h[i*400] * x4 +
    b5_M1h[i*400] * x5 +
    b6_M1h[i*400] * x6 +
    b7_M1h[i*400] * x7

  # FIX: originally this assigned to `pvaf_M1[i]` (a leftover vector from
  # the M1x section), leaving `pvaf_M1h` at its initial zeros — which is
  # exactly why the reported mean PVAF for M1h was 0.
  pvaf_M1h[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
# NOTE(review): as originally written, the preceding loop stored its PVAF
# values into `pvaf_M1` rather than `pvaf_M1h`, so `pvaf_M1h` could remain
# all zeros — verify that the reported 0 is genuine and not an artifact.
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1h), "\n") #  0  
## Mean Percent Variance Accounted For (PVAF): 0
# Normality check for residuals
# Residuals of the posterior-mean M1h fit (no interaction term). Note this
# is (fitted - observed), i.e. sign-flipped relative to the usual
# (observed - fitted) convention.
residu <- mean(b0_M1h) + mean(b1_M1h)*x1 + mean(b2_M1h)*x2 + mean(b3_M1h)*x3 + 
  mean(b4_M1h)*x4 + mean(b5_M1h)*x5 + mean(b6_M1h)*x6 + mean(b7_M1h)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# NOTE(review): bare expression — prints the object `M1nh_2018` (presumably
# the matching 2018 fit defined earlier in the file) if it exists, errors
# otherwise. Reads like a section marker for the "M1nh" model; confirm.
M1nh_2018

# M1nh: Gaussian likelihood without the interaction term, with a
# hierarchical normal shrinkage prior — each slope ~ N(0, sigmaBeta^2)
# where the common scale sigmaBeta gets a dgamma(2.618, 1.618) hyperprior.
# dnorm in JAGS is parameterized by precision, hence 1/sigma^2.
modelString_M1nh_2022 = "
model{
  for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2)
    }
      
  # priors vague 
  b0 ~ dnorm(0, 1/2^2)
  b1 ~ dnorm(0.0, 1/sigmaBeta^2)
  b2 ~ dnorm(0.0, 1/sigmaBeta^2)
  b3 ~ dnorm(0.0, 1/sigmaBeta^2)
  b4 ~ dnorm(0.0, 1/sigmaBeta^2)
  b5 ~ dnorm(0.0, 1/sigmaBeta^2)
  b6 ~ dnorm(0.0, 1/sigmaBeta^2)
  b7 ~ dnorm(0.0, 1/sigmaBeta^2)
  
  # Sigma
  sigma ~ dgamma(0.01, 0.01)
  sigmaBeta ~ dgamma(2.618, 1.618) 
}
"

# MCMC run
# Random starting values for each of the three chains: all regression
# coefficients, sigma and the prior scale sigmaBeta drawn from U(0, 1).
myinits_M1nh2022 <- lapply(1:3, function(chain) {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       sigma = runif(1), sigmaBeta = runif(1))
})

# Fit M1nh (Gaussian likelihood, hierarchical normal shrinkage priors):
# 3 chains, 500 adapt + 500 burn-in, 5000 retained samples per chain;
# pD/DIC monitored for model comparison.
out_M1nh2022 <- run.jags(model=modelString_M1nh_2022, data=dataList, inits=myinits_M1nh2022,
                        n.chains=3,
                        adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                      "b5","b6", "b7", "sigma", "sigmaBeta","pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 10 variables....
## Finished running the simulation
print(out_M1nh2022) # pD =9.15726 DIC = -67.58924
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                            
##             Lower95     Median  Upper95       Mean       SD Mode      MCerr
## b0          0.43795    0.49351  0.55165    0.49324 0.028947   -- 0.00023475
## b1        -0.030914   0.011134 0.062033   0.013368 0.023208   --    0.00029
## b2        -0.095809  -0.005905 0.054768  -0.010981 0.036255   -- 0.00071786
## b3        -0.054395  0.0055803 0.083921  0.0093528 0.033084   --  0.0005561
## b4        -0.069738 -0.0087056 0.039553  -0.011404 0.027185   -- 0.00038276
## b5        -0.084462  -0.018073 0.024321  -0.022043 0.027802   -- 0.00048372
## b6        -0.057557 -0.0074499 0.036536 -0.0089183 0.023172   -- 0.00024317
## b7        -0.052437 0.00049611 0.068145  0.0018908 0.029041   --  0.0004424
## sigma        0.2072      0.246  0.29129    0.24765 0.021612   -- 0.00024107
## sigmaBeta 0.0028192   0.033783 0.099664   0.041382 0.029546   -- 0.00087246
##                                         
##           MC%ofSD SSeff     AC.10   psrf
## b0            0.8 15205 0.0080659 1.0002
## b1            1.2  6404  0.032088 1.0005
## b2              2  2551   0.07789 1.0055
## b3            1.7  3539  0.027562 1.0007
## b4            1.4  5044  0.016342  1.001
## b5            1.7  3303  0.054542 1.0016
## b6              1  9080  0.025251 1.0009
## b7            1.5  4309  0.028079 1.0021
## sigma         1.1  8037 0.0064764 1.0007
## sigmaBeta       3  1147   0.24506 1.0032
## 
## Model fit assessment:
## DIC = 8.30011
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 5.53077
## 
## Total time taken: 2.6 seconds
plot(out_M1nh2022) 
## Generating plots...

## PVAF --------------------------
# Flatten the posterior draws of each monitored scalar into plain numeric
# vectors. For coefficient bk, `bkk_M1nh` keeps the raw mcmc.list and
# `bk_M1nh` holds that variable's draws concatenated across all chains
# (the `[, 1]` selects the first/only variable column of the mcmc.list).
for (k in 0:7) {
  nm <- paste0("b", k)
  draws <- as.mcmc.list(out_M1nh2022, vars = nm)
  assign(paste0(nm, k, "_M1nh"), draws)                     # e.g. b00_M1nh
  assign(paste0(nm, "_M1nh"), as.numeric(unlist(draws[, 1])))  # e.g. b0_M1nh
}

# Residual-SD draws, flattened the same way.
sigmas_M1nh <- as.mcmc.list(out_M1nh2022, vars = "sigma")
s_M1nh <- as.numeric(unlist(sigmas_M1nh[, 1]))


# Posterior Predictive Check (PPC)
# Build evaluation grids X1..X7: 74 evenly spaced points spanning the
# observed range of each predictor (74 matches the number of observations).
for (k in 1:7) {
  xk <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xk), max(xk), length.out = 74))
}

# Observed data and PPC-simulated points are drawn in the loop below.
nn <- length(y)  # number of observations (x1..x7 assumed to match y's length)

par(mfrow = c(4, 2))  # 4 x 2 grid of panels, one per predictor


# One PPC panel per predictor: observed (xi, y) points, one simulated
# replicate per observation, and 20 posterior-predictive lines. Note the
# lines vary ALL grids X1..X7 simultaneously, so each panel shows the same
# joint sweep projected onto a different x-axis.
for (i in 1:7) {
  x_var <- get(paste0("x", i))  # predictor shown on this panel's x-axis

  # Observed data.
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One posterior-predictive replicate per observation, thinning the chain
  # by taking every 10th draw (j*10 <= 740, well within the stored draws).
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1nh[j*10] +
      b1_M1nh[j*10] * x1[j] +
      b2_M1nh[j*10] * x2[j] +
      b3_M1nh[j*10] * x3[j] +
      b4_M1nh[j*10] * x4[j] +
      b5_M1nh[j*10] * x5[j] +
      b6_M1nh[j*10] * x6[j] +
      b7_M1nh[j*10] * x7[j] + rnorm(1, 0, s_M1nh[j*10])
  }

  # Overlay PPC points and a legend.
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  leg_labels <- c("Observed", "PPC-simulated")  # renamed: `text` masked graphics::text()
  leg_pch <- c(1, 18)
  legend("topright", legend = leg_labels,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines from 20 thinned draws.
  # BUG FIX: the inner loop previously reused `i`, clobbering the panel
  # index within the body; it now uses `k`.
  mm <- 20
  pvaf_M1nh <- numeric(mm)
  X_var <- get(paste0("X", i))  # grid matching this panel's predictor
  for (k in 1:mm) {
    # Posterior predictive line over the joint grid (with noise).
    ppd1 <- b0_M1nh[k*400] +
      b1_M1nh[k*400] * X1 +
      b2_M1nh[k*400] * X2 +
      b3_M1nh[k*400] * X3 +
      b4_M1nh[k*400] * X4 +
      b5_M1nh[k*400] * X5 +
      b6_M1nh[k*400] * X6 +
      b7_M1nh[k*400] * X7 +
      rnorm(1, 0, s_M1nh[k*400])

    # Fitted values at the observed predictors (no noise term).
    y.prd <- b0_M1nh[k*400] +
      b1_M1nh[k*400] * x1 +
      b2_M1nh[k*400] * x2 +
      b3_M1nh[k*400] * x3 +
      b4_M1nh[k*400] * x4 +
      b5_M1nh[k*400] * x5 +
      b6_M1nh[k*400] * x6 +
      b7_M1nh[k*400] * x7

    # Proportion of variance accounted for by this draw's fit.
    pvaf_M1nh[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Restore the default single-panel layout.
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (every 400th of the stored draws).
mm <- 20
pvaf_M1nh <- rep(0, mm)
for (m in seq_len(mm)) {
  d <- m * 400  # posterior draw index used for this iteration

  # Linear predictor over the evaluation grids (kept for parity with the
  # plotting loop above; not used in the PVAF itself).
  ppd1 <- b0_M1nh[d] +
    b1_M1nh[d] * X1 +
    b2_M1nh[d] * X2 +
    b3_M1nh[d] * X3 +
    b4_M1nh[d] * X4 +
    b5_M1nh[d] * X5 +
    b6_M1nh[d] * X6 +
    b7_M1nh[d] * X7

  # Fitted values at the observed predictors.
  y.prd <- b0_M1nh[d] +
    b1_M1nh[d] * x1 +
    b2_M1nh[d] * x2 +
    b3_M1nh[d] * x3 +
    b4_M1nh[d] * x4 +
    b5_M1nh[d] * x5 +
    b6_M1nh[d] * x6 +
    b7_M1nh[d] * x7

  pvaf_M1nh[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF (the captured run below produced -0.0203).
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1nh), "\n")
## Mean Percent Variance Accounted For (PVAF): -0.02030157
# Residual check at the posterior-mean coefficients (fitted minus observed).
residu <- mean(b0_M1nh) + mean(b1_M1nh)*x1 + mean(b2_M1nh)*x2 + mean(b3_M1nh)*x3 +
  mean(b4_M1nh)*x4 + mean(b5_M1nh)*x5 + mean(b6_M1nh)*x6 + mean(b7_M1nh)*x7 - y
qqnorm(residu); qqline(residu)

hist(residu, breaks = 11)

# Model M1xh: robust regression with a Student-t likelihood (unknown df `nu`),
# an x2:x3 interaction term (b23), and heavy-tailed Cauchy (t with df = 1)
# shrinkage priors on the slopes sharing a common scale `sigmaBeta`.
# NOTE(review): sigma gets a dgamma prior directly on the SD scale and the
# likelihood uses precision 1/sigma^2 — presumably intentional; confirm.
modelString_M1xh = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2, nu)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b2 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b3 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b4 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b5 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b6 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b7 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b23 ~ dnorm(0, 1/2^2)
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    sigmaBeta ~ dgamma(2.618, 1.618)
    nu ~ dexp(0.0333)

    }
"
# MCMC run
# Random starting values for each of the three chains (one named list per
# chain). runif() is drawn in the same order as before: b0..b7, b23, sigma,
# sigmaBeta.
init_names_M1xh <- c(paste0("b", 0:7), "b23", "sigma", "sigmaBeta")
myinits_M1xh <- replicate(
  3,
  setNames(as.list(runif(length(init_names_M1xh))), init_names_M1xh),
  simplify = FALSE
)

# 3 chains, 500 adapt + 500 burn-in, 5000 retained samples; monitor all
# coefficients, the scales, the t degrees of freedom, and pD/DIC.
out_M1xh <- run.jags(
  model    = modelString_M1xh,
  data     = dataList,
  inits    = myinits_M1xh,
  n.chains = 3,
  adapt    = 500,
  burnin   = 500,
  sample   = 5000,
  monitor  = c(paste0("b", 0:7), "b23", "sigma", "sigmaBeta", "nu", "pD", "DIC")
)
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 12 variables....
## Finished running the simulation
print(out_M1xh) 
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                             
##              Lower95     Median  Upper95       Mean       SD Mode      MCerr
## b0           0.43632    0.49892  0.56359    0.49898 0.032696   -- 0.00042094
## b1         -0.032476  0.0072876 0.059423   0.010457 0.022785   -- 0.00031383
## b2          -0.09045 -0.0048874 0.061034  -0.010831 0.036121   -- 0.00090566
## b3         -0.059731  0.0040335 0.082418  0.0088926 0.033631   -- 0.00077657
## b4         -0.072062 -0.0071676 0.039396  -0.011235 0.027238   -- 0.00049464
## b5         -0.084374  -0.014387 0.028551  -0.020097 0.029045   -- 0.00052308
## b6          -0.05879 -0.0056028 0.033973 -0.0083289 0.022476   -- 0.00030494
## b7         -0.057456  0.0008095 0.062724  0.0023286 0.028508   -- 0.00054777
## b23        -0.042252 -0.0079385 0.027277 -0.0076775 0.017833   --  0.0002542
## sigma         0.1987    0.24079  0.28852    0.24194 0.022862   -- 0.00028033
## sigmaBeta 0.00053138   0.026554 0.090914   0.034864 0.030445   -- 0.00096043
## nu            3.2581     33.593   105.96     42.179   31.716   --    0.51883
##                                          
##           MC%ofSD SSeff      AC.10   psrf
## b0            1.3  6033  -0.014502 1.0001
## b1            1.4  5271   0.020621 1.0003
## b2            2.5  1591    0.15352 1.0047
## b3            2.3  1875    0.11794 1.0028
## b4            1.8  3032   0.043514 1.0022
## b5            1.8  3083   0.049011 1.0003
## b6            1.4  5433   0.006346 1.0005
## b7            1.9  2709   0.050711 1.0006
## b23           1.4  4921   0.021998 1.0001
## sigma         1.2  6651 0.00020335 1.0004
## sigmaBeta     3.2  1005    0.29126 1.0044
## nu            1.6  3737  -0.002607 1.0009
## 
## Model fit assessment:
## DIC = 11.16171
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 6.53267
## 
## Total time taken: 50.4 seconds
plot(out_M1xh) 
## Generating plots...

## PVAF --------------------------
# Flatten the posterior draws of each monitored scalar into numeric vectors
# (all chains concatenated via the first variable column of the mcmc.list).
b00_M1xh <- as.mcmc.list(out_M1xh, vars = "b0")
b0_M1xh <- as.numeric(unlist(b00_M1xh[, 1]))
b11_M1xh <- as.mcmc.list(out_M1xh, vars = "b1")
b1_M1xh <- as.numeric(unlist(b11_M1xh[, 1]))
# BUG FIX: this previously used vars = "b", which matches every b* monitor,
# so b2_M1xh did not reliably contain the b2 draws. With vars = "b2" the
# first selected column is b2 even if partial matching also picks up b23.
b22_M1xh <- as.mcmc.list(out_M1xh, vars = "b2")
b2_M1xh <- as.numeric(unlist(b22_M1xh[, 1]))
b33_M1xh <- as.mcmc.list(out_M1xh, vars = "b3")
b3_M1xh <- as.numeric(unlist(b33_M1xh[, 1]))
b44_M1xh <- as.mcmc.list(out_M1xh, vars = "b4")
b4_M1xh <- as.numeric(unlist(b44_M1xh[, 1]))
b55_M1xh <- as.mcmc.list(out_M1xh, vars = "b5")
b5_M1xh <- as.numeric(unlist(b55_M1xh[, 1]))
b66_M1xh <- as.mcmc.list(out_M1xh, vars = "b6")
b6_M1xh <- as.numeric(unlist(b66_M1xh[, 1]))
b77_M1xh <- as.mcmc.list(out_M1xh, vars = "b7")
b7_M1xh <- as.numeric(unlist(b77_M1xh[, 1]))
b233_M1xh <- as.mcmc.list(out_M1xh, vars = "b23")
b23_M1xh <- as.numeric(unlist(b233_M1xh[, 1]))

# Residual scale and t degrees-of-freedom draws. NOTE(review): "sigma" may
# also partial-match "sigmaBeta"; column 1 should be sigma — confirm against
# runjags' vars matching.
sigmas_M1xh <- as.mcmc.list(out_M1xh, vars = "sigma")
s_M1xh <- as.numeric(unlist(sigmas_M1xh[, 1]))
nus_M1xh <-  as.mcmc.list(out_M1xh, vars = "nu")
nu_M1xh <- as.numeric(unlist(nus_M1xh[, 1]))

# Posterior Predictive Check (PPC)
# Rebuild the evaluation grids X1..X7: 74 evenly spaced points spanning the
# observed range of each predictor.
for (k in 1:7) {
  xk <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xk), max(xk), length.out = 74))
}

# Observed data and PPC-simulated points are drawn in the loop below.
nn <- length(y)  # number of observations (x1..x7 assumed to match y's length)

par(mfrow = c(4, 2))  # 4 x 2 grid of panels, one per predictor

# One PPC panel per predictor for the robust-t model: observed (xi, y)
# points, simulated replicates with t-distributed noise, and 20 posterior
# predictive lines (all grids swept jointly, projected onto each x-axis).
for (i in 1:7) {
  x_var <- get(paste0("x", i))  # predictor shown on this panel's x-axis

  # Observed data.
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One posterior-predictive replicate per observation (every 10th draw),
  # with scaled-t noise using that draw's sigma and nu.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1xh[j*10] +
      b1_M1xh[j*10] * x1[j] +
      b2_M1xh[j*10] * x2[j] +
      b3_M1xh[j*10] * x3[j] +
      b4_M1xh[j*10] * x4[j] +
      b5_M1xh[j*10] * x5[j] +
      b6_M1xh[j*10] * x6[j] +
      b7_M1xh[j*10] * x7[j] + b23_M1xh[j*10]*x2[j]*x3[j] + s_M1xh[j*10]*rt(1, nu_M1xh[j*10])
  }

  # Overlay PPC points and a legend.
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  leg_labels <- c("Observed", "PPC-simulated")  # renamed: `text` masked graphics::text()
  leg_pch <- c(1, 18)
  legend("topright", legend = leg_labels,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines from 20 thinned draws.
  # BUG FIX: the inner loop previously reused `i`, clobbering the panel
  # index within the body; it now uses `k`.
  mm <- 20
  pvaf_M1 <- numeric(mm)
  X_var <- get(paste0("X", i))  # grid matching this panel's predictor
  for (k in 1:mm) {
    # Posterior predictive line over the joint grid (no noise term here).
    ppd1 <- b0_M1xh[k*400] +
      b1_M1xh[k*400] * X1 +
      b2_M1xh[k*400] * X2 +
      b3_M1xh[k*400] * X3 +
      b4_M1xh[k*400] * X4 +
      b5_M1xh[k*400] * X5 +
      b6_M1xh[k*400] * X6 +
      b7_M1xh[k*400] * X7 + b23_M1xh[k*400]*X2*X3

    # Fitted values at the observed predictors.
    y.prd <- b0_M1xh[k*400] +
      b1_M1xh[k*400] * x1 +
      b2_M1xh[k*400] * x2 +
      b3_M1xh[k*400] * x3 +
      b4_M1xh[k*400] * x4 +
      b5_M1xh[k*400] * x5 +
      b6_M1xh[k*400] * x6 +
      b7_M1xh[k*400] * x7 + b23_M1xh[k*400]*x2*x3

    # Proportion of variance accounted for by this draw's fit.
    pvaf_M1[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (every 400th of the stored draws).
mm <- 20
pvaf_M1x <- rep(0, mm)
for (i in seq_len(mm)) {
  d <- i * 400  # posterior draw index used for this iteration

  # Linear predictor over the evaluation grids (not used in the PVAF).
  # BUG FIX: the interaction term previously indexed b23_M1xh[i*100],
  # inconsistent with every other coefficient's i*400; it now uses draw d.
  ppd1 <- b0_M1xh[d] +
    b1_M1xh[d] * X1 +
    b2_M1xh[d] * X2 +
    b3_M1xh[d] * X3 +
    b4_M1xh[d] * X4 +
    b5_M1xh[d] * X5 +
    b6_M1xh[d] * X6 +
    b7_M1xh[d] * X7 + b23_M1xh[d]*X2*X3

  # Fitted values at the observed predictors.
  y.prd <- b0_M1xh[d] +
    b1_M1xh[d] * x1 +
    b2_M1xh[d] * x2 +
    b3_M1xh[d] * x3 +
    b4_M1xh[d] * x4 +
    b5_M1xh[d] * x5 +
    b6_M1xh[d] * x6 +
    b7_M1xh[d] * x7 + b23_M1xh[d]*x2*x3

  pvaf_M1x[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF (the captured run below produced about -4.69).
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1x), "\n")
## Mean Percent Variance Accounted For (PVAF): -4.692856
# Residual check at the posterior-mean coefficients (fitted minus observed).
residu <- mean(b0_M1xh) + mean(b1_M1xh)*x1 + mean(b2_M1xh)*x2 + mean(b3_M1xh)*x3 +
  mean(b4_M1xh)*x4 + mean(b5_M1xh)*x5 + mean(b6_M1xh)*x6 + mean(b7_M1xh)*x7 + mean(b23_M1xh)*x2*x3 - y
qqnorm(residu); qqline(residu)

hist(residu, breaks = 11)

# Model M1xnh: normal likelihood with the x2:x3 interaction (b23) and normal
# shrinkage priors on the slopes sharing a common scale `sigmaBeta` (i.e. the
# non-robust counterpart of M1xh, with no t degrees-of-freedom parameter).
modelString_M1xnh = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ dnorm(0.0, 1/sigmaBeta^2)
    b2 ~ dnorm(0.0, 1/sigmaBeta^2)
    b3 ~ dnorm(0.0, 1/sigmaBeta^2)
    b4 ~ dnorm(0.0, 1/sigmaBeta^2)
    b5 ~ dnorm(0.0, 1/sigmaBeta^2)
    b6 ~ dnorm(0.0, 1/sigmaBeta^2)
    b7 ~ dnorm(0.0, 1/sigmaBeta^2)
    b23 ~ dnorm(0, 1/2^2)
    
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    sigmaBeta ~ dgamma(2.618, 1.618)
    
    }
"
# MCMC run
# Random starting values for each of the three chains (one named list per
# chain). runif() is drawn in the same order as before: b0..b7, b23, sigma,
# sigmaBeta.
init_names_M1xnh <- c(paste0("b", 0:7), "b23", "sigma", "sigmaBeta")
myinits_M1xnh <- replicate(
  3,
  setNames(as.list(runif(length(init_names_M1xnh))), init_names_M1xnh),
  simplify = FALSE
)

# 3 chains, 500 adapt + 500 burn-in, 5000 retained samples; monitor all
# coefficients, both scales, and pD/DIC.
out_M1xnh <- run.jags(
  model    = modelString_M1xnh,
  data     = dataList,
  inits    = myinits_M1xnh,
  n.chains = 3,
  adapt    = 500,
  burnin   = 500,
  sample   = 5000,
  monitor  = c(paste0("b", 0:7), "b23", "sigma", "sigmaBeta", "pD", "DIC")
)
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 11 variables....
## Finished running the simulation
print(out_M1xnh) # pD = 9.3521, -68.97304
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                            
##             Lower95     Median  Upper95       Mean       SD Mode      MCerr
## b0          0.43664    0.50073  0.56665    0.50084 0.033016   -- 0.00033541
## b1        -0.031925  0.0091063 0.061087   0.011502 0.023516   -- 0.00029321
## b2        -0.095789 -0.0051982 0.063264  -0.010995 0.038783   -- 0.00082498
## b3        -0.053985  0.0056468 0.089102   0.010469 0.034812   -- 0.00066185
## b4        -0.070055 -0.0090438 0.039771  -0.012469 0.027147   -- 0.00037468
## b5        -0.081908  -0.015586 0.031079  -0.020121 0.028642   -- 0.00051051
## b6        -0.062012 -0.0075165 0.034219 -0.0096819 0.023533   -- 0.00026636
## b7        -0.057723  0.0010486 0.068359  0.0030512 0.029969   -- 0.00044945
## b23       -0.041735  -0.008891   0.0238 -0.0088374 0.016821   -- 0.00017722
## sigma       0.20881    0.24755  0.29289    0.24907 0.021551   --  0.0002337
## sigmaBeta 0.0027297   0.033453  0.10212   0.041778 0.032232   --   0.001094
##                                          
##           MC%ofSD SSeff      AC.10   psrf
## b0              1  9690 -0.0088745 1.0001
## b1            1.2  6432   0.015428 1.0011
## b2            2.1  2210   0.095614 1.0022
## b3            1.9  2767    0.07047 1.0012
## b4            1.4  5250   0.029837 1.0012
## b5            1.8  3148   0.073496 1.0016
## b6            1.1  7806    0.01768 1.0009
## b7            1.5  4446   0.011504  1.002
## b23           1.1  9009  -0.015894 1.0002
## sigma         1.1  8504 -0.0009852      1
## sigmaBeta     3.4   868    0.32252 1.0038
## 
## Model fit assessment:
## DIC = 10.03237
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 6.47588
## 
## Total time taken: 2.7 seconds
plot(out_M1xnh) 
## Generating plots...

## PVAF --------------------------
# Flatten the posterior draws of each monitored scalar into numeric vectors
# (all chains concatenated via the first variable column of the mcmc.list).
b00_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b0")
b0_M1xnh <- as.numeric(unlist(b00_M1xnh[, 1]))
b11_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b1")
b1_M1xnh <- as.numeric(unlist(b11_M1xnh[, 1]))
# BUG FIX: this previously used vars = "b", which matches every b* monitor,
# so b2_M1xnh did not reliably contain the b2 draws. With vars = "b2" the
# first selected column is b2 even if partial matching also picks up b23.
b22_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b2")
b2_M1xnh <- as.numeric(unlist(b22_M1xnh[, 1]))
b33_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b3")
b3_M1xnh <- as.numeric(unlist(b33_M1xnh[, 1]))
b44_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b4")
b4_M1xnh <- as.numeric(unlist(b44_M1xnh[, 1]))
b55_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b5")
b5_M1xnh <- as.numeric(unlist(b55_M1xnh[, 1]))
b66_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b6")
b6_M1xnh <- as.numeric(unlist(b66_M1xnh[, 1]))
b77_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b7")
b7_M1xnh <- as.numeric(unlist(b77_M1xnh[, 1]))
b233_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b23")
b23_M1xnh <- as.numeric(unlist(b233_M1xnh[, 1]))

# Residual-SD draws. NOTE(review): "sigma" may also partial-match
# "sigmaBeta"; column 1 should be sigma — confirm against runjags' matching.
sigmas_M1xnh <- as.mcmc.list(out_M1xnh, vars = "sigma")
s_M1xnh <- as.numeric(unlist(sigmas_M1xnh[, 1]))


# Posterior Predictive Check (PPC)
# Rebuild the evaluation grids X1..X7: 74 evenly spaced points spanning the
# observed range of each predictor.
for (k in 1:7) {
  xk <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xk), max(xk), length.out = 74))
}

# Observed data and PPC-simulated points are drawn in the loop below.
nn <- length(y)  # number of observations (x1..x7 assumed to match y's length)

par(mfrow = c(4, 2))  # 4 x 2 grid of panels, one per predictor


# One PPC panel per predictor for the normal interaction model: observed
# (xi, y) points, simulated replicates with Gaussian noise, and 20 posterior
# predictive lines (all grids swept jointly, projected onto each x-axis).
for (i in 1:7) {
  x_var <- get(paste0("x", i))  # predictor shown on this panel's x-axis

  # Observed data.
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One posterior-predictive replicate per observation (every 10th draw).
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1xnh[j*10] +
      b1_M1xnh[j*10] * x1[j] +
      b2_M1xnh[j*10] * x2[j] +
      b3_M1xnh[j*10] * x3[j] +
      b4_M1xnh[j*10] * x4[j] +
      b5_M1xnh[j*10] * x5[j] +
      b6_M1xnh[j*10] * x6[j] +
      b7_M1xnh[j*10] * x7[j] + b23_M1xnh[j*10]*x2[j]*x3[j] + rnorm(1, 0, s_M1xnh[j*10])
  }

  # Overlay PPC points and a legend.
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  leg_labels <- c("Observed", "PPC-simulated")  # renamed: `text` masked graphics::text()
  leg_pch <- c(1, 18)
  legend("topright", legend = leg_labels,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines from 20 thinned draws.
  # BUG FIX: the inner loop previously reused `i`, clobbering the panel
  # index within the body; it now uses `k`.
  mm <- 20
  pvaf_M1xnh <- numeric(mm)
  X_var <- get(paste0("X", i))  # grid matching this panel's predictor
  for (k in 1:mm) {
    # Posterior predictive line over the joint grid (no noise term here).
    ppd1 <- b0_M1xnh[k*400] +
      b1_M1xnh[k*400] * X1 +
      b2_M1xnh[k*400] * X2 +
      b3_M1xnh[k*400] * X3 +
      b4_M1xnh[k*400] * X4 +
      b5_M1xnh[k*400] * X5 +
      b6_M1xnh[k*400] * X6 +
      b7_M1xnh[k*400] * X7 +
      b23_M1xnh[k*400]*X2*X3

    # Fitted values at the observed predictors.
    y.prd <- b0_M1xnh[k*400] +
      b1_M1xnh[k*400] * x1 +
      b2_M1xnh[k*400] * x2 +
      b3_M1xnh[k*400] * x3 +
      b4_M1xnh[k*400] * x4 +
      b5_M1xnh[k*400] * x5 +
      b6_M1xnh[k*400] * x6 +
      b7_M1xnh[k*400] * x7 +
      b23_M1xnh[k*400]*x2*x3

    # Proportion of variance accounted for by this draw's fit.
    pvaf_M1xnh[k] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Restore the default single-panel layout.
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (every 400th of the stored draws).
mm <- 20
pvaf_M1xnh <- rep(0, mm)
for (m in seq_len(mm)) {
  d <- m * 400  # posterior draw index used for this iteration

  # Linear predictor over the evaluation grids (kept for parity with the
  # plotting loop above; not used in the PVAF itself).
  ppd1 <- b0_M1xnh[d] +
    b1_M1xnh[d] * X1 +
    b2_M1xnh[d] * X2 +
    b3_M1xnh[d] * X3 +
    b4_M1xnh[d] * X4 +
    b5_M1xnh[d] * X5 +
    b6_M1xnh[d] * X6 +
    b7_M1xnh[d] * X7 +
    b23_M1xnh[d] * X2 * X3

  # Fitted values at the observed predictors.
  y.prd <- b0_M1xnh[d] +
    b1_M1xnh[d] * x1 +
    b2_M1xnh[d] * x2 +
    b3_M1xnh[d] * x3 +
    b4_M1xnh[d] * x4 +
    b5_M1xnh[d] * x5 +
    b6_M1xnh[d] * x6 +
    b7_M1xnh[d] * x7 +
    b23_M1xnh[d] * x2 * x3

  pvaf_M1xnh[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF (the captured run below produced about -4.49).
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1xnh), "\n")
## Mean Percent Variance Accounted For (PVAF): -4.49073
# Residual check at the posterior-mean coefficients (fitted minus observed).
residu <- mean(b0_M1xnh) + mean(b1_M1xnh)*x1 + mean(b2_M1xnh)*x2 + mean(b3_M1xnh)*x3 +
  mean(b4_M1xnh)*x4 + mean(b5_M1xnh)*x5 + mean(b6_M1xnh)*x6 + mean(b7_M1xnh)*x7 + mean(b23_M1xnh)*x2*x3 - y
qqnorm(residu); qqline(residu)

hist(residu, breaks = 11)